In [2]:
%matplotlib inline

from matplotlib import pyplot as plt
import matplotlib.mlab as mlab
import csv
from scipy.stats import norm
import numpy as np
import scipy.stats as stats

In [3]:
data = open('../data/data.csv', 'r').readlines()
fieldnames = ['x', 'y', 'z', 'unmasked', 'synapses']
reader = csv.reader(data)
reader.next()

rows = [[int(col) for col in row] for row in reader]

In [4]:
#This gets a set of all the z and y values

sorted_y = sorted(list(set([r[1] for r in rows])))
sorted_z = sorted(list(set([r[2] for r in rows])))
# y vals = [1369, 1408, 1447, 1486, 1525, 1564, 1603, 1642, 1681, 
            #1720, 1759, 1798, 1837, 1876, 1915, 1954, 1993, 2032, 
            #2071, 2110, 2149, 2188, 2227, 2266, 2305, 2344, 2383, 
            #2422, 2461, 2500, 2539, 2578, 2617, 2656, 2695, 2734, 
            #2773, 2812, 2851, 2890, 2929, 2968, 3007, 3046, 3085, 
            #3124, 3163, 3202, 3241, 3280, 3319, 3358]

In [48]:
#Can we plot histograms representing the distribution of synapses in each layer in the Y direction?
#What can we infer?
for i in sorted_y:
    unmaskedSynapsesNoZero = ([r[-1] for r in rows if r[-2] != 0 if r[-1] !=0 if r[1] == i])
    mean = np.mean(unmaskedSynapsesNoZero)
    variance = np.var(unmaskedSynapsesNoZero)
    plt.hist(unmaskedSynapsesNoZero, bins=50)
    plt.title("Layer " + str(i))
    plt.show()
    print "Layer " + str(i) + " has a mean: " + str(mean) + " and variance: " + str(variance)
    
#Synapse density and distribution radically decreases in the deeper layers


Layer 1369 has a mean: 172.90655885 and variance: 6352.09908546
Layer 1408 has a mean: 169.21217547 and variance: 6661.21639607
Layer 1447 has a mean: 168.855734767 and variance: 6611.02488647
Layer 1486 has a mean: 174.01170117 and variance: 6911.43190629
Layer 1525 has a mean: 176.261301989 and variance: 6257.14058203
Layer 1564 has a mean: 176.981046931 and variance: 6650.63411732
Layer 1603 has a mean: 176.452898551 and variance: 6950.61734667
Layer 1642 has a mean: 180.980108499 and variance: 6360.64155731
Layer 1681 has a mean: 185.218181818 and variance: 6529.25966942
Layer 1720 has a mean: 186.479564033 and variance: 6469.62832897
Layer 1759 has a mean: 185.626811594 and variance: 6191.14152752
Layer 1798 has a mean: 177.955575703 and variance: 5969.19022956
Layer 1837 has a mean: 172.568949772 and variance: 5907.67355643
Layer 1876 has a mean: 175.453467153 and variance: 5842.77520696
Layer 1915 has a mean: 178.257064722 and variance: 5756.02689859
Layer 1954 has a mean: 175.255454545 and variance: 5803.31383388
Layer 1993 has a mean: 171.575342466 and variance: 5749.0571089
Layer 2032 has a mean: 163.039233577 and variance: 5724.03951912
Layer 2071 has a mean: 161.126142596 and variance: 5691.29670231
Layer 2110 has a mean: 167.531934307 and variance: 5467.55007509
Layer 2149 has a mean: 169.118874773 and variance: 5467.93595953
Layer 2188 has a mean: 172.001814882 and variance: 5078.99636694
Layer 2227 has a mean: 165.93715847 and variance: 5164.74559557
Layer 2266 has a mean: 157.211050725 and variance: 5149.05600107
Layer 2305 has a mean: 153.248860529 and variance: 5381.16596269
Layer 2344 has a mean: 157.020128088 and variance: 4993.5476278
Layer 2383 has a mean: 160.717925387 and variance: 5055.91861408
Layer 2422 has a mean: 157.729261623 and variance: 4913.31867885
Layer 2461 has a mean: 154.058394161 and variance: 4619.36520326
Layer 2500 has a mean: 149.732664234 and variance: 4826.30353159
Layer 2539 has a mean: 144.028362306 and variance: 4449.58839961
Layer 2578 has a mean: 149.671875 and variance: 4313.72045898
Layer 2617 has a mean: 147.722120658 and variance: 4349.4492913
Layer 2656 has a mean: 140.963302752 and variance: 4186.54727717
Layer 2695 has a mean: 138.975183824 and variance: 4319.83853857
Layer 2734 has a mean: 141.383194829 and variance: 3885.83746459
Layer 2773 has a mean: 135.741219963 and variance: 3865.90530646
Layer 2812 has a mean: 129.258953168 and variance: 4144.07252085
Layer 2851 has a mean: 125.985347985 and variance: 3874.71407103
Layer 2890 has a mean: 122.108555658 and variance: 3722.07837206
Layer 2929 has a mean: 117.620593692 and variance: 3686.00725679
Layer 2968 has a mean: 111.225621415 and variance: 3869.58389421
Layer 3007 has a mean: 100.810218978 and variance: 3974.72310725
Layer 3046 has a mean: 103.269230769 and variance: 4474.30972633
Layer 3085 has a mean: 97.0806223479 and variance: 4458.22970937
Layer 3124 has a mean: 82.3756432247 and variance: 3982.13504997
Layer 3163 has a mean: 66.625 and variance: 3622.60279605
Layer 3202 has a mean: 61.7955390335 and variance: 3243.32250798
Layer 3241 has a mean: 35.5898876404 and variance: 1299.2194483
Layer 3280 has a mean: 9.50526315789 and variance: 64.6499722992
Layer 3319 has a mean: 3.93181818182 and variance: 8.79080578512
Layer 3358 has a mean: 1.4 and variance: 0.24

In [49]:
#Can we fit a gaussian distribution to each layer?

for i in sorted_y:
    unmaskedSynapsesNoZero = ([r[-1] for r in rows if r[-2] != 0 if r[-1] !=0 if r[1] == i])
    # best fit of data
    (mu, sigma) = norm.fit(unmaskedSynapsesNoZero)

    # the histogram of the data
    n, bins, patches = plt.hist(unmaskedSynapsesNoZero, 60, normed=1, facecolor='green', alpha=0.75)

    # add a 'best fit' line
    y = mlab.normpdf( bins, mu, sigma)
    l = plt.plot(bins, y, 'r--', linewidth=2)

    #plot
    plt.xlabel("Layer " + str(i))
    plt.ylabel('Probability')
    plt.title(r'$\mathrm{Histogram\ of\ Layer %.f}\ \mu=%.3f,\ \sigma=%.3f$' %(i, mu, sigma))
    plt.grid(True)

    plt.show()
    
#Gaussian does not quite fit



In [50]:
from sklearn import mixture 
def fit_samples(samples):
    gmix = mixture.GMM(n_components=2, covariance_type='full')
    gmix.fit(samples)
    print gmix.means_
    colors = ['r' if i==0 else 'g' for i in gmix.predict(samples)]
    ax = plt.gca()
    ax.scatter(samples[:,0], samples[:,1], c=colors, alpha=0.8)
    plt.show()

In [53]:
#Can we fit gamma distribution?
#Gamma does not quite fit
for i in sorted_y:
    unmaskedSynapsesNoZero = ([r[-1] for r in rows if r[-2] != 0 if r[-1] !=0 if r[1] == i])

    alpha, loc, beta=stats.gamma.fit(unmaskedSynapsesNoZero)

    rv = stats.gamma(alpha,loc,beta)

    h = plt.hist(unmaskedSynapsesNoZero, normed=True, bins=50)
    x = np.linspace(0,400)
    plt.plot(x, rv.pdf(x), lw=2)
    plt.show()



In [54]:
#Attempt to plot the entire data set in 3d in order to find some other characteristics?
#The plot does not look that great. Too dense. Needs adjustment
from mpl_toolkits.mplot3d import Axes3D

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

#print unmaskedSynapsesNoZero
x = []
y = []
z = []
val = []

for r in rows:
    if r[-2] != 0:
        if r[-1] !=0:
            x.append(r[0])
            y.append(r[1])
            z.append(r[2])
            
ax.scatter(x, y, z, c='b')
plt.show()



In [20]:
#until now we have not taken into account the value in the unmasked column
#From what I understand unmasked is equal to the amount of 'unmasked' voxels in each 
#super voxel that were looked at due to the fact that other areas were deemed not 
#to have important information.
#So the true value of synapses per super voxel is synapse number divided by unmasked number
#Can we fit a Gaussian to this distribution?
s = []
for r in rows:
    if r[-2] != 0:
        if r[-1] !=0:
            s.append(float(r[-1])/float(r[-2]))
    
#GAUSSIAN
# best fit of data normal curve (gaussian)
(mu, sigma) = norm.fit(s)
# the histogram of the data
n, bins, patches = plt.hist(s, 60, normed=1, facecolor='green', alpha=0.75)

# add a 'best fit' line
y = mlab.normpdf( bins, mu, sigma)
l = plt.plot(bins, y, 'r--', linewidth=2)

plt.show()



In [23]:
from lmfit.models import SkewedGaussianModel

s = []
for r in rows:
    if r[-2] != 0:
        if r[-1] !=0:
            s.append(float(r[-1])/float(r[-2]))


#GAUSSIAN
# best fit of data normal curve (gaussian)
(mu, sigma) = norm.fit(s)
# the histogram of the data
n, bins, patches = plt.hist(s, 60, normed=1, facecolor='green', alpha=0.75)


#xvals, yvals = read_your_histogram()

model = SkewedGaussianModel()

# set initial parameter values
params = model.make_params(amplitude=10, center=0, sigma=1, gamma=0)

# adjust parameters  to best fit data.
result = model.fit(n, params, x=bins[:60])

print(result.fit_report())
#pylab.plot(xvals, yvals)
plt.plot(bins[:60], result.best_fit, 'b--', linewidth=2) 



# add a 'best fit' line
y = mlab.normpdf( bins, mu, sigma)
l = plt.plot(bins, y, 'r--', linewidth=2)
plt.xticks(np.arange(0, .004, .001))
plt.yticks(np.arange(0, 1200, 200))
plt.title("Distribution Of Synapes Density Accounting for Unmasked",fontsize =20 )
plt.xlabel("Synapse Density", fontsize =20)
plt.ylabel("Count",fontsize =20)
plt.legend(['Fitted SkewedGaussian', 'Fitted Gaussian'], loc='upper right')
plt.show()


[[Model]]
    Model(skewed_gaussian)
[[Fit Statistics]]
    # function evals   = 367
    # data points      = 60
    # variables        = 4
    chi-square         = 102305.249
    reduced chi-square = 1826.879
    Akaike info crit   = 458.622
    Bayesian info crit = 466.999
[[Variables]]
    amplitude:   0.98654103 +/- 0.015933 (1.62%) (init= 10)
    sigma:       0.00064406 +/- 1.96e-05 (3.04%) (init= 1)
    center:      0.00157955 +/- 1.22e-05 (0.77%) (init= 0)
    gamma:      -2.75584380 +/- 0.262922 (9.54%) (init= 0)
[[Correlations]] (unreported correlations are <  0.100)
    C(sigma, center)             =  0.809 
    C(sigma, gamma)              = -0.777 
    C(center, gamma)             = -0.721 
    C(amplitude, sigma)          =  0.449 
    C(amplitude, center)         =  0.303 


In [28]:
#take into account unmasked for each layer
#Both Gaussian and Gamma give somewhat nice curves
from lmfit.models import SkewedGaussianModel
import pylab

for i in sorted_y:
    s = []
    for r in rows:
        if r[-2] != 0:
            if r[-1] !=0:
                if r[1] == i:
                    s.append(float(r[-1])/float(r[-2]))
                    
    # the histogram of the data
    n, bins, patches = plt.hist(s, 60, normed=1, facecolor='green', alpha=0.75)
    
    
    model = SkewedGaussianModel()
    params = model.make_params(amplitude=10, center=0, sigma=1, gamma=0)

    # adjust parameters  to best fit data.
    result = model.fit(n, params, x=bins[:60])

    #print(result.fit_report())
    #pylab.plot(xvals, yvals)
    plt.plot(bins[:60], result.best_fit, 'b--', linewidth=2) 
    
    #GAUSSIAN
    # best fit of data normal curve (gaussian)
    (mu, sigma) = norm.fit(s)
    # add a 'best fit' line
    y = mlab.normpdf( bins, mu, sigma)
    l = plt.plot(bins, y, 'r--', linewidth=2)
    plt.xticks(np.arange(0, .0035, .001))
    plt.yticks(np.arange(0, 1600, 200))
    plt.xlim([0, .0035])
    plt.ylim([0,1600])
    plt.suptitle("Distribution Of Density in Y Layer: "+ str(i),fontsize = 20)
    plt.xlabel("Synapse Density", fontsize = 20)
    plt.ylabel("Count",fontsize = 20)
    plt.legend(['Fitted SkewedGaussian', 'Fitted Gaussian'], loc='upper right',bbox_to_anchor=(1.3, 0.5))
    pylab.savefig('Y'+str(i)+'.png')
    plt.show()



In [57]:
#Does a colored scatter plot based on synapse concentration gie us anymore insight?
import matplotlib
#print unmaskedSynapsesNoZero

allVal = []
for r in rows:
    if r[-2] != 0:
            if r[-1] !=0:
                allVal.append(r[-1])
                
norm = matplotlib.colors.Normalize(vmin = np.min(allVal), vmax = np.max(allVal), clip = False)

for i in sorted_y:
    x = []
    z = []
    val = []

    for r in rows:
        if r[-2] != 0:
            if r[-1] !=0:
                if r[1] == i:
                    x.append(r[0])
                    z.append(r[2])
                    val.append(r[-1])
            
    plt.scatter(x,z,s = 50, c = val,norm=norm)
    plt.show()



In [59]:
#This is broken !! Attempt to fix color map in full volume notebook

x = []
z = []
val = []
for r in rows:
    if r[-2] != 0:
        if r[-1] !=0:
            if r[1] == 1369:
                x.append(r[0])
                z.append(r[2])
                val.append(r[-1])

val = np.array(val)
x = np.array(x)
z= np.array(z)


plt.pcolormesh(x, z, val)
plt.colorbar() 
plt.show()


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-59-9d241c4d2a57> in <module>()
     17 
     18 
---> 19 plt.pcolormesh(x, z, val)
     20 plt.colorbar()
     21 plt.show()

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/pyplot.pyc in pcolormesh(*args, **kwargs)
   2944         ax.hold(hold)
   2945     try:
-> 2946         ret = ax.pcolormesh(*args, **kwargs)
   2947         draw_if_interactive()
   2948     finally:

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/axes.pyc in pcolormesh(self, *args, **kwargs)
   7734         allmatch = (shading == 'gouraud')
   7735 
-> 7736         X, Y, C = self._pcolorargs('pcolormesh', *args, allmatch=allmatch)
   7737         Ny, Nx = X.shape
   7738 

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/axes.pyc in _pcolorargs(funcname, *args, **kw)
   7349         if len(args) == 3:
   7350             X, Y, C = args
-> 7351             numRows, numCols = C.shape
   7352         else:
   7353             raise TypeError(

ValueError: need more than 1 value to unpack

In [60]:
#This is just a test to see what the same plot would look like in the Z direction
allVal = []
for r in rows:
    if r[-2] != 0:
            if r[-1] !=0:
                allVal.append(float(r[-1])/r[-2])
                
norm = matplotlib.colors.Normalize(vmin = np.min(allVal), vmax = np.max(allVal), clip = False)

for i in sorted_z:
    x = []
    y = []
    val = []

    for r in rows:
        if r[-2] != 0:
            if r[-1] !=0:
                if r[2] == i:
                    x.append(r[0])
                    y.append(r[1])
                    val.append(float(r[-1])/r[-2])
            
    plt.scatter(x,y,s = 10, c = val,norm=norm)
    plt.show()



In [61]:
# Is there a relationship between synapse count and unmasked?
#Is high synapse count correlated with high unmasked?
#Yes! but this makes sense because only voxels that were deemed to have important information were unmasked
#Masked voxels probably had unimportant tissue unrelated to synapses
unmaskedNotZeroUnmask = []
valNotZeroUnmask= [] 

unmaskedNotZeroVal = []
valNotZeroVal =[]

unmaskedBothNotZero =[]
valBothNotZero = []

for r in rows:
    if r[-2] != 0: #unmasked not zero
        unmaskedNotZeroUnmask.append(r[-2])
        valNotZeroUnmask.append(r[-1])
    if r[-1] !=0: #value is not zero
        unmaskedNotZeroVal.append(r[-2])
        valNotZeroVal.append(r[-1])
    if r[2] != 0: 
        if r[-1] !=0:
            unmaskedBothNotZero.append(r[-2])
            valBothNotZero.append(r[-1])
        
        
#all data
corr = stats.pearsonr(rows[-1],rows[-2])
print corr

#Unmasked is not zero
corr = stats.pearsonr(unmaskedNotZeroUnmask, valNotZeroUnmask)
print corr

#Value is not zero
corr = stats.pearsonr(unmaskedNotZeroVal, valNotZeroVal)
print corr

#Both are not zero
corr = stats.pearsonr(unmaskedBothNotZero, valBothNotZero)
print corr


(0.99968377732048319, 6.7499845347884113e-06)
(0.86770155069080035, 0.0)
(0.82158123102297553, 0.0)
(0.82158123102297553, 0.0)

In [62]:
import pandas as pd

columns = ['Mean','Variance', 'Median', 'Min', 'Max', 'Mean Account For Unmasked',
           'Variance Account For Unmasked', 'Median Account For Unmasked',
           'Min Account For Unmasked', 'Max Account For Unmasked']
index = sorted_y
df = pd.DataFrame(index=index, columns=columns)

for i in sorted_y:
    syn = []
    synAccountUn = []
    for r in rows:
        if r[-2] != 0:
            if r[-1] !=0:
                if r[1] == i:
                    synAccountUn.append(float(r[-1])/r[-2])
                    syn.append(r[-1])
    nextRow = [np.mean(syn),  np.var(syn), np.median(syn), min(syn), max(syn),np.mean(synAccountUn),
               np.var(synAccountUn), np.median(synAccountUn), min(synAccountUn), max(synAccountUn)]
    df.loc[i] = nextRow

print df


         Mean Variance Median Min  Max Mean Account For Unmasked  \
1369  172.907   6352.1    193   1  356                0.00127862   
1408  169.212  6661.22    191   1  336                0.00124971   
1447  168.856  6611.02    191   1  336                 0.0012546   
1486  174.012  6911.43    199   1  349                0.00127785   
1525  176.261  6257.14    196   1  481                0.00128058   
1564  176.981  6650.63    196   1  383                0.00129012   
1603  176.453  6950.62    199   1  358                0.00127574   
1642   180.98  6360.64    198   1  366                0.00130033   
1681  185.218  6529.26    207   1  344                0.00133697   
1720   186.48  6469.63    206   1  374                0.00134581   
1759  185.627  6191.14    204   1  361                0.00133767   
1798  177.956  5969.19    197   1  374                0.00129808   
1837  172.569  5907.67    187   1  376                0.00126365   
1876  175.453  5842.78  191.5   1  507                0.00127681   
1915  178.257  5756.03    195   1  370                0.00127792   
1954  175.255  5803.31    192   1  340                0.00126328   
1993  171.575  5749.06    188   1  368                0.00125556   
2032  163.039  5724.04    178   1  373                0.00119504   
2071  161.126   5691.3    175   1  347                0.00119115   
2110  167.532  5467.55    183   1  355                0.00121803   
2149  169.119  5467.94    185   1  327                0.00122822   
2188  172.002     5079    186   1  454                0.00124232   
2227  165.937  5164.75    182   1  398                0.00120688   
2266  157.211  5149.06  175.5   1  340                0.00115771   
2305  153.249  5381.17    171   1  381                0.00113316   
2344   157.02  4993.55    173   1  315                0.00114355   
2383  160.718  5055.92    176   1  325                0.00115972   
2422  157.729  4913.32    173   1  308                0.00114548   
2461  154.058  4619.37    169   1  338                0.00112597   
2500  149.733   4826.3    164   1  366                0.00111498   
2539  144.028  4449.59    159   1  307                0.00106832   
2578  149.672  4313.72    164   1  304                0.00109734   
2617  147.722  4349.45    162   1  385                0.00108324   
2656  140.963  4186.55    154   1  279                0.00105008   
2695  138.975  4319.84    152   1  371                0.00104429   
2734  141.383  3885.84    156   1  334                0.00104619   
2773  135.741  3865.91    149   1  312                0.00101382   
2812  129.259  4144.07    143   1  279               0.000985203   
2851  125.985  3874.71    137   1  391               0.000962489   
2890  122.109  3722.08    131   1  284               0.000950077   
2929  117.621  3686.01    126   1  262               0.000932369   
2968  111.226  3869.58    122   1  268               0.000910994   
3007   100.81  3974.72    106   1  259               0.000873507   
3046  103.269  4474.31    111   1  285               0.000894803   
3085  97.0806  4458.23    101   1  282               0.000869165   
3124  82.3756  3982.14     76   1  237               0.000794917   
3163   66.625   3622.6     46   1  209                0.00070883   
3202  61.7955  3243.32     44   1  209               0.000650961   
3241  35.5899  1299.22   19.5   1  169               0.000475578   
3280  9.50526    64.65      7   1   38               0.000320065   
3319  3.93182  8.79081      3   1   12               0.000337733   
3358      1.4     0.24      1   1    2                0.00116578   

     Variance Account For Unmasked Median Account For Unmasked  \
1369                   1.84845e-07                  0.00134021   
1408                   1.97555e-07                  0.00132625   
1447                   1.94431e-07                  0.00131492   
1486                   2.18923e-07                  0.00136458   
1525                   2.00489e-07                  0.00135602   
1564                   2.03657e-07                  0.00136192   
1603                   2.19896e-07                   0.0013651   
1642                   1.99741e-07                  0.00136484   
1681                   1.92096e-07                  0.00142332   
1720                   1.91736e-07                  0.00141704   
1759                   1.87798e-07                  0.00138712   
1798                   1.80255e-07                  0.00135759   
1837                   1.74764e-07                  0.00132104   
1876                   1.70734e-07                  0.00133104   
1915                   1.64669e-07                   0.0013251   
1954                   1.65931e-07                  0.00131094   
1993                   1.74892e-07                  0.00129598   
2032                    1.8123e-07                  0.00124396   
2071                   1.73406e-07                  0.00123502   
2110                   1.67442e-07                  0.00125694   
2149                   1.64489e-07                  0.00128939   
2188                   1.54628e-07                  0.00130234   
2227                   1.53008e-07                  0.00125955   
2266                   1.53135e-07                  0.00121839   
2305                    1.6842e-07                  0.00119921   
2344                   1.52084e-07                  0.00119481   
2383                    1.5401e-07                  0.00122681   
2422                   1.44984e-07                  0.00120328   
2461                   1.44417e-07                  0.00116927   
2500                   1.60754e-07                  0.00117117   
2539                   1.38208e-07                  0.00111464   
2578                     1.304e-07                  0.00115069   
2617                   1.40627e-07                  0.00112726   
2656                   1.39364e-07                  0.00108561   
2695                   1.47601e-07                  0.00109134   
2734                   1.42775e-07                  0.00109244   
2773                    1.3131e-07                  0.00105198   
2812                   1.41662e-07                  0.00103306   
2851                    1.3838e-07                  0.00100086   
2890                   1.34387e-07                 0.000981732   
2929                   1.31802e-07                 0.000965056   
2968                   1.49905e-07                 0.000977362   
3007                   1.62617e-07                 0.000922978   
3046                    1.7223e-07                 0.000947416   
3085                   1.85598e-07                 0.000906275   
3124                   1.67861e-07                 0.000837161   
3163                   1.31851e-07                 0.000743501   
3202                   1.40504e-07                 0.000642084   
3241                   1.03691e-07                 0.000411508   
3280                   6.10519e-08                 0.000275287   
3319                   5.03091e-08                 0.000295053   
3358                   2.76313e-07                  0.00105042   

     Min Account For Unmasked Max Account For Unmasked  
1369              4.73373e-05               0.00246002  
1408              4.25613e-05                0.0024737  
1447              2.73284e-05               0.00240224  
1486              3.79406e-05               0.00291986  
1525              3.35785e-05                0.0033204  
1564              3.62174e-05               0.00273065  
1603              5.43311e-05               0.00306679  
1642              5.22848e-05               0.00327527  
1681              5.32226e-05               0.00244012  
1720              3.76563e-05               0.00279942  
1759              5.87441e-05               0.00336659  
1798              7.40393e-05               0.00284787  
1837              0.000117137               0.00275826  
1876               7.3373e-05                0.0033802  
1915              5.66091e-05               0.00236198  
1954              4.35673e-05               0.00257604  
1993              6.10221e-05               0.00261044  
2032              7.23066e-05               0.00305432  
2071              4.51467e-05               0.00270111  
2110               3.6495e-05               0.00273735  
2149              2.68269e-05               0.00251036  
2188              2.43185e-05               0.00312317  
2227              4.09987e-05               0.00335862  
2266              6.63174e-05               0.00238731  
2305              6.12145e-05               0.00368445  
2344              4.85814e-05               0.00250508  
2383              4.28027e-05               0.00214775  
2422              4.41969e-05                0.0021999  
2461              3.72079e-05               0.00264331  
2500              4.26931e-05               0.00282102  
2539              3.68772e-05               0.00250293  
2578              3.48262e-05               0.00229573  
2617              2.88738e-05               0.00349452  
2656              6.04303e-05               0.00313462  
2695              3.79607e-05               0.00352847  
2734              3.81286e-05                0.0030972  
2773              3.32784e-05               0.00259083  
2812              4.11879e-05               0.00269992  
2851              3.66367e-05                0.0032031  
2890              4.71832e-05               0.00271774  
2929              3.09176e-05               0.00310022  
2968               2.2701e-05               0.00328699  
3007              2.51064e-05               0.00301023  
3046              2.80026e-05                0.0029367  
3085              3.80489e-05               0.00337732  
3124                2.575e-05               0.00282776  
3163              2.89402e-05               0.00236443  
3202              2.26644e-05               0.00176419  
3241              2.65675e-05                0.0018033  
3280              2.66439e-05               0.00143054  
3319              8.68432e-05               0.00116178  
3358               0.00059952               0.00216216  

In [ ]: